%matplotlib inline
# import network warper demo
import warp_demo_3 as warper
# a necessary library for writing out our transform rules mathematically
from sympy import symbols, lambdify, sin, cos, tanh, exp, log, Max, Piecewise, And
from ipywidgets import interact
import sys
sys.path.append('../demo_python_backend_files')
# our home-made warper
demo = warper.warp_demo()
# make instance of warper
x1,x2 = symbols('x1 x2')
# define your own rules here using elementary functions like sin, cos, tanh, sinc, or any polynomial combination of x1 and x2
rule1 = x1*x1 - 4
rule2 = x2*x2 - 4
# these two lines feed the transformation rules into the warping function and produce a toy dataset based on them
demo.define_rule(rule1,rule2)
demo.make_pts()
# this line creates an instance of the slider - if you do not have bokeh installed you can use the demo1.transformation_slider - this uses matplotlib as the backend (and is considerably slower)
demo.transformation_slider()
# build a fresh instance of our home-made warper for a second example
demo = warper.warp_demo()
# symbolic coordinates the warp rules are written in
x1,x2 = symbols('x1 x2')
# transformation rules: any combination of elementary sympy functions of x1 and x2 works here
horizontal_rule = tanh(x1 + x2 + tanh(x1))
vertical_rule = tanh(0.1*x1)
# hand the rules to the warper, then generate a toy dataset from them
demo.define_rule(horizontal_rule,vertical_rule)
demo.make_pts()
# launch the interactive transformation slider (bokeh backend; the matplotlib fallback is considerably slower)
demo.transformation_slider()
# import statements
%matplotlib inline
import ml_optimization_1dim_sliders as demo
demo = demo.ml_optimization_1dim_sliders()
# call the demonstration
csvname = 'toy_regression_data.csv'
demo.load_data(csvname)
# run gradient descent
demo.run_lin_regression_grad_descent(inits = [-2.5,-2.5],max_its = 60)
# run slider
demo.fitting_slider(xlabel = 'intercept', ylabel= 'slope', view = [30,70], fit_type = 'line fit')
# import statements
%matplotlib inline
import ml_optimization_1dim_sliders as demo
demo = demo.ml_optimization_1dim_sliders()
# call the demonstration
csvname = 'sinusoidal_data.csv'
demo.load_data(csvname,'sin')
# run gradient descent
demo.run_lin_regression_grad_descent(inits = [-2.5,2.5],max_its = 30)
# run slider
demo.fitting_slider(xlabel = 'phase', ylabel= 'amplitude',view = [60,20], fit_type = 'sine fit')
# import statements
%matplotlib inline
import ml_optimization_1dim_sliders as demo
demo = demo.ml_optimization_1dim_sliders()
# call the demonstration
csvname = 'logistic_1dim_dataset.csv'
demo.load_data(csvname,'logistic')
# run gradient descent
demo.run_logistic_regression_grad_descent(inits = [-2.5,-2.5],max_its = 20)
# run slider
demo.fitting_slider(xlabel = 'intercept', ylabel= 'slope',view = [40,30], fit_type = 'logistic fit')
# import statements
%matplotlib inline
import ml_optimization_1dim_sliders as demo
demo = demo.ml_optimization_1dim_sliders()
# call the demonstration
csvname = 'logistic_1dim_dataset.csv'
demo.load_data(csvname,'logistic')
# run gradient descent
demo.run_logistic_regression_grad_descent(inits = [-2.5,-2.5],max_its = 20)
# run slider
demo.classification_slider(xlabel = 'intercept', ylabel= 'slope',view = [40,30], fit_type = 'logistic fit')
# import statements
%matplotlib inline
import classification_optimization_sliders as demo
demo = demo.classification_optimization_sliders()
# call the demonstration
csvname = 'linear_classification_data.csv'
demo.load_data(csvname)
#run slider
demo.animate_fit(max_its = 40,view = [20,-10])
# import statements
%matplotlib inline
import classification_optimization_sliders as demo
demo = demo.classification_optimization_sliders()
# call the demonstration
csvname = '2eggs_dataset.csv'
demo.load_data(csvname)
#run slider
demo.animate_fit(kernel = 'rbf',gamma = 10,solver = 'newtons method',max_its = 10,view = [20,-10])